return IA64_FAULT;
}
+/*
+ * Return non-zero when the physical address (after clearing the UC
+ * attribute bit) has bits set at or above MAX_PHYS_ADDR_BITS, i.e.
+ * lies outside the implemented physical address space.
+ */
+static inline int unimpl_phys_addr (u64 paddr)
+{
+ return (pa_clear_uc(paddr) >> MAX_PHYS_ADDR_BITS) != 0;
+}
+
/* We came here because the H/W VHPT walker failed to find an entry */
IA64FAULT
vmx_hpw_miss(u64 vadr, u64 vec, REGS* regs)
/* DTLB miss. */
if (misr.sp) /* Refer to SDM Vol2 Table 4-11,4-12 */
return vmx_handle_lds(regs);
+ if (unlikely(unimpl_phys_addr(vadr))) {
+ unimpl_daddr(v);
+ return IA64_FAULT;
+ }
pte = lookup_domain_mpa(v->domain, pa_clear_uc(vadr), NULL);
- /* Clear UC bit in vadr with the shifts. */
if (v->domain != dom0 && (pte & GPFN_IO_MASK)) {
emulate_io_inst(v, pa_clear_uc(vadr), 4, pte);
return IA64_FAULT;
}
+ } else {
+ if (unlikely(unimpl_phys_addr(vadr))) {
+ unimpl_iaddr_trap(v, vadr);
+ return IA64_FAULT;
+ }
}
physical_tlb_miss(v, vadr, type);
return IA64_FAULT;
}
#ifdef VMAL_NO_FAULT_CHECK
if (unimplemented_gva(vcpu,r3) ) {
- isr.val = set_isr_ei_ni(vcpu);
- isr.code = IA64_RESERVED_REG_FAULT;
- vcpu_set_isr(vcpu, isr.val);
unimpl_daddr(vcpu);
return IA64_FAULT;
}
}
#ifdef VMAL_NO_FAULT_CHECK
if (unimplemented_gva(vcpu,r3) ) {
- isr.val = set_isr_ei_ni(vcpu);
- isr.code = IA64_RESERVED_REG_FAULT;
- vcpu_set_isr(vcpu, isr.val);
unimpl_daddr(vcpu);
return IA64_FAULT;
}
}
#ifdef VMAL_NO_FAULT_CHECK
if (unimplemented_gva(vcpu,r3) ) {
- isr.val = set_isr_ei_ni(vcpu);
- isr.code = IA64_RESERVED_REG_FAULT;
- vcpu_set_isr(vcpu, isr.val);
unimpl_daddr(vcpu);
return IA64_FAULT;
}
return IA64_FAULT;
}
if (unimplemented_gva(vcpu,r3) ) {
- isr.val = set_isr_ei_ni(vcpu);
- isr.code = IA64_RESERVED_REG_FAULT;
- vcpu_set_isr(vcpu, isr.val);
unimpl_daddr(vcpu);
return IA64_FAULT;
}
return IA64_FAULT;
}
if (unimplemented_gva(vcpu, ifa)) {
- isr.val = set_isr_ei_ni(vcpu);
- isr.code = IA64_RESERVED_REG_FAULT;
- vcpu_set_isr(vcpu, isr.val);
unimpl_daddr(vcpu);
return IA64_FAULT;
}
return IA64_FAULT;
}
if (unimplemented_gva(vcpu, ifa)) {
- isr.val = set_isr_ei_ni(vcpu);
- isr.code = IA64_RESERVED_REG_FAULT;
- vcpu_set_isr(vcpu, isr.val);
unimpl_daddr(vcpu);
return IA64_FAULT;
}
}
#ifdef VMAL_NO_FAULT_CHECK
if (unimplemented_gva(vcpu,ifa) ) {
- isr.val = set_isr_ei_ni(vcpu);
- isr.code = IA64_RESERVED_REG_FAULT;
- vcpu_set_isr(vcpu, isr.val);
unimpl_daddr(vcpu);
return IA64_FAULT;
}
+/*
+ * Unimplemented Data Address fault: set ISR (ei/ni from current state,
+ * code = IA64_UNIMPL_DADDR_FAULT) before raising the General Exception,
+ * so the guest sees the correct fault code rather than the
+ * reserved-register code the callers previously set themselves.
+ */
static inline void
unimpl_daddr (VCPU *vcpu)
{
+ ISR isr;
+
+ isr.val = set_isr_ei_ni(vcpu);
+ isr.code = IA64_UNIMPL_DADDR_FAULT;
+ vcpu_set_isr(vcpu, isr.val);
 _general_exception(vcpu);
}
set_ifa_itir_iha(vcpu, vadr, 1, 1, 0);
inject_guest_interruption(vcpu, IA64_DATA_ACCESS_RIGHTS_VECTOR);
}
+
+/*
+ * Unimplemented Instruction Address Trap
+ * @ Lower-Privilege Transfer Trap Vector
+ * Refer to SDM Vol2 Table 5-6 & 8-1
+ */
+static inline void
+unimpl_iaddr_trap (VCPU *vcpu, u64 vadr)
+{
+ ISR isr;
+
+ /* ISR.code identifies the unimplemented-instruction-address trap. */
+ isr.val = set_isr_ei_ni(vcpu);
+ isr.code = IA64_UNIMPL_IADDR_TRAP;
+ vcpu_set_isr(vcpu, isr.val);
+ /* Expose the offending address to the guest through IFA. */
+ vcpu_set_ifa(vcpu, vadr);
+ inject_guest_interruption(vcpu, IA64_LOWERPRIV_TRANSFER_TRAP_VECTOR);
+}
#endif